Implementing Special-Effect Video on Android with Surface + Camera2
About the author
Author: newki
Link: https://juejin.cn/post/7262358127345762364
Published with the author's authorization.
Preface
How do the three Android Camera APIs each handle preview, and how do they differ? How do the byte[] formats delivered by their callbacks differ, and how are they converted and rotated? For the three input formats commonly used when recording video, NV21, I420 and Surface, which COLOR_FORMAT does each one use for encoding? And are you roughly clear on the basic MediaCodec configuration: frame rate, resolution, bitrate, and the concept of key (I) frames?
Without further ado, let's walk through it step by step.
When the utility class is initialized, it creates an audio-encoding HandlerThread and a video-encoding HandlerThread, and sets up the configurations for both the audio and the video encoder. When recording starts, the audio and video encoders are started, the audio recorder is started, and the muxer is created; each encoder then begins encoding on its own worker thread. Once the audio encoder has stamped its buffers with properly synchronized timestamps, every finished audio buffer is handed to the muxer to be written, and the video encoder does the same for video buffers. Inside the video encoding loop, a stop-recording signal is used to decide whether recording should actually finish, and the final result is delivered through the callback.
public VideoCaptureUtils(@NonNull RecordConfig config, Size size) {
this.mRecordConfig = config;
this.mResolutionSize = size;
// Initialize the audio and video encoding threads
mVideoHandlerThread = new HandlerThread(CameraXThreads.TAG + "video encoding thread");
mAudioHandlerThread = new HandlerThread(CameraXThreads.TAG + "audio encoding thread");
// Start the video thread
mVideoHandlerThread.start();
mVideoHandler = new Handler(mVideoHandlerThread.getLooper());
// Start the audio thread
mAudioHandlerThread.start();
mAudioHandler = new Handler(mAudioHandlerThread.getLooper());
if (mCameraSurface != null) {
mVideoEncoder.stop();
mVideoEncoder.release();
mAudioEncoder.stop();
mAudioEncoder.release();
releaseCameraSurface(false);
}
try {
mVideoEncoder = MediaCodec.createEncoderByType(VIDEO_MIME_TYPE);
mAudioEncoder = MediaCodec.createEncoderByType(AUDIO_MIME_TYPE);
} catch (IOException e) {
throw new IllegalStateException("Unable to create MediaCodec due to: " + e.getCause());
}
// Set up the audio/video encoders and the audio recorder
setupEncoder();
}
void setupEncoder() {
// Initialize the video encoder
mVideoEncoder.reset();
mVideoEncoder.configure(createVideoMediaFormat(), null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
if (mCameraSurface != null) {
releaseCameraSurface(false);
}
// The Surface used as the encoder's input
mCameraSurface = mVideoEncoder.createInputSurface();
// Initialize the audio encoder
mAudioEncoder.reset();
mAudioEncoder.configure(createAudioMediaFormat(), null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
// Initialize the audio recorder
if (mAudioRecorder != null) {
mAudioRecorder.release();
}
mAudioRecorder = autoConfigAudioRecordSource();
if (mAudioRecorder == null) {
Log.e(TAG, "AudioRecord object cannot initialized correctly!");
}
// Reset the audio/video track indices and mark recording as not started
mVideoTrackIndex = -1;
mAudioTrackIndex = -1;
mIsRecording = false;
}
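createVideoMediaFormat(), createAudioMediaFormat() and autoConfigAudioRecordSource() are referenced above but not listed. The sketch below shows roughly what they could look like; the concrete numbers (30 fps, a 1-second I-frame interval, ~6 Mbps video, 44.1 kHz mono AAC) are illustrative assumptions rather than the project's actual values, which come from RecordConfig, and the sketch assumes mAudioBufferSize (used later in the audio loop) is set here.
private MediaFormat createVideoMediaFormat() {
    // VIDEO_MIME_TYPE is assumed to be "video/avc" (H.264)
    MediaFormat format = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE,
            mResolutionSize.getWidth(), mResolutionSize.getHeight());
    // Surface input: frames arrive through the Surface returned by createInputSurface()
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    format.setInteger(MediaFormat.KEY_BIT_RATE, 6 * 1024 * 1024);   // ~6 Mbps
    format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);              // 30 fps
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);         // one key (I) frame per second
    return format;
}

private MediaFormat createAudioMediaFormat() {
    // AUDIO_MIME_TYPE is assumed to be "audio/mp4a-latm" (AAC)
    MediaFormat format = MediaFormat.createAudioFormat(AUDIO_MIME_TYPE, 44100, 1);
    format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    format.setInteger(MediaFormat.KEY_BIT_RATE, 64 * 1024);         // 64 kbps
    return format;
}

// Requires the RECORD_AUDIO permission to have been granted by the caller.
private AudioRecord autoConfigAudioRecordSource() {
    int sampleRate = 44100;
    int channelConfig = AudioFormat.CHANNEL_IN_MONO;
    int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
    mAudioBufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
    if (mAudioBufferSize <= 0) {
        return null;
    }
    AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
            sampleRate, channelConfig, audioFormat, mAudioBufferSize * 2);
    return recorder.getState() == AudioRecord.STATE_INITIALIZED ? recorder : null;
}
Because the encoder here is fed through a Surface, KEY_COLOR_FORMAT must be COLOR_FormatSurface; with NV21 or I420 input you would instead pick a matching YUV420 color format and feed raw buffers via queueInputBuffer().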
public void startRecording(
@NonNull OutputFileOptions outputFileOptions,
@NonNull Executor executor,
@NonNull OnVideoSavedCallback callback) {
if (Looper.getMainLooper() != Looper.myLooper()) {
CameraXExecutors.mainThreadExecutor().execute(() -> startRecording(outputFileOptions, executor, callback));
return;
}
Log.d(TAG, "startRecording");
mIsFirstVideoSampleWrite.set(false);
mIsFirstAudioSampleWrite.set(false);
VideoSavedListenerWrapper postListener = new VideoSavedListenerWrapper(executor, callback);
// Guard against starting while a recording is already in progress
if (!mEndOfAudioVideoSignal.get()) {
postListener.onError(ERROR_RECORDING_IN_PROGRESS, "It is still in video recording!", null);
return;
}
try {
// Start the audio recorder
mAudioRecorder.startRecording();
} catch (IllegalStateException e) {
postListener.onError(ERROR_ENCODER, "AudioRecorder start fail", e);
return;
}
try {
// Start the audio and video encoders
Log.d(TAG, "audioEncoder and videoEncoder all start");
mVideoEncoder.start();
mAudioEncoder.start();
} catch (IllegalStateException e) {
postListener.onError(ERROR_ENCODER, "Audio/Video encoder start fail", e);
return;
}
// Start the muxer
try {
synchronized (mMediaMuxerLock) {
mMediaMuxer = initMediaMuxer(outputFileOptions);
Preconditions.checkNotNull(mMediaMuxer);
mMediaMuxer.setOrientationHint(90); // Set the orientation hint of the output file; the parameter is the rotation angle the video should be played back with
Metadata metadata = outputFileOptions.getMetadata();
if (metadata != null && metadata.location != null) {
mMediaMuxer.setLocation(
(float) metadata.location.getLatitude(),
(float) metadata.location.getLongitude());
}
}
} catch (IOException e) {
postListener.onError(ERROR_MUXER, "MediaMuxer creation failed!", e);
return;
}
// Set the flags that mark recording as started
mEndOfVideoStreamSignal.set(false);
mEndOfAudioStreamSignal.set(false);
mEndOfAudioVideoSignal.set(false);
mIsRecording = true;
// Start audio encoding on its worker thread
mAudioHandler.post(() -> audioEncode(postListener));
// Start video encoding on its worker thread
mVideoHandler.post(() -> {
boolean errorOccurred = videoEncode(postListener);
if (!errorOccurred) {
postListener.onVideoSaved(new OutputFileResults(mSavedVideoUri));
mSavedVideoUri = null;
}
});
}
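initMediaMuxer() is also not shown above. A rough sketch, assuming OutputFileOptions can carry either a File or a ParcelFileDescriptor (the getFile()/getFileDescriptor() accessors are assumptions modeled on the CameraX-style API this class mirrors):
private MediaMuxer initMediaMuxer(@NonNull OutputFileOptions outputFileOptions) throws IOException {
    if (outputFileOptions.getFile() != null) {
        // File-based output: remember the Uri so it can be reported in OutputFileResults
        File file = outputFileOptions.getFile();
        mSavedVideoUri = Uri.fromFile(file);
        return new MediaMuxer(file.getAbsolutePath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
    } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O
            && outputFileOptions.getFileDescriptor() != null) {
        // FD-based output: keep the ParcelFileDescriptor so videoEncode() can close it at the end
        mParcelFileDescriptor = outputFileOptions.getFileDescriptor();
        return new MediaMuxer(mParcelFileDescriptor.getFileDescriptor(),
                MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
    }
    throw new IOException("No output destination was provided in OutputFileOptions");
}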
/**
* The audio encoding routine: runs on a worker thread and loops until recording ends.
* Each completed output buffer is written out to the muxer.
*/
boolean audioEncode(OnVideoSavedCallback videoSavedCallback) {
// Audio encoding loop. Exits on end of stream.
boolean audioEos = false;
int outIndex;
while (!audioEos && mIsRecording && mAudioEncoder != null) {
// Check for end of stream from main thread
if (mEndOfAudioStreamSignal.get()) {
mEndOfAudioStreamSignal.set(false);
mIsRecording = false;
}
// get audio deque input buffer
if (mAudioEncoder != null) {
int index = mAudioEncoder.dequeueInputBuffer(-1);
if (index >= 0) {
final ByteBuffer buffer = getInputBuffer(mAudioEncoder, index);
buffer.clear();
int length = mAudioRecorder.read(buffer, mAudioBufferSize);
if (length > 0) {
mAudioEncoder.queueInputBuffer(
index,
0,
length,
(System.nanoTime() / 1000),
mIsRecording ? 0 : MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}
}
// start to dequeue audio output buffer
do {
outIndex = mAudioEncoder.dequeueOutputBuffer(mAudioBufferInfo, 0);
switch (outIndex) {
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
synchronized (mMediaMuxerLock) {
mAudioTrackIndex = mMediaMuxer.addTrack(mAudioEncoder.getOutputFormat());
Log.d(TAG, "mAudioTrackIndex:" + mAudioTrackIndex + "mVideoTrackIndex:" + mVideoTrackIndex);
if (mAudioTrackIndex >= 0 && mVideoTrackIndex >= 0) {
mMuxerStarted = true;
Log.d(TAG, "media mMuxer start by audio");
mMediaMuxer.start();
}
}
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
break;
default:
audioEos = writeAudioEncodedBuffer(outIndex);
}
} while (outIndex >= 0 && !audioEos);
}
}
// When the loop exits, recording has stopped; stop the audio recorder
try {
Log.d(TAG, "audioRecorder stop");
mAudioRecorder.stop();
} catch (IllegalStateException e) {
videoSavedCallback.onError(ERROR_ENCODER, "Audio recorder stop failed!", e);
}
// Stop the audio encoder
try {
mAudioEncoder.stop();
} catch (IllegalStateException e) {
videoSavedCallback.onError(ERROR_ENCODER, "Audio encoder stop failed!", e);
}
Log.d(TAG, "Audio encode thread end");
mEndOfVideoStreamSignal.set(true);
return false;
}
/**
* Writes an encoded audio buffer out to the muxer.
*/
private boolean writeAudioEncodedBuffer(int bufferIndex) {
ByteBuffer buffer = getOutputBuffer(mAudioEncoder, bufferIndex);
buffer.position(mAudioBufferInfo.offset);
if (mAudioTrackIndex >= 0 && mVideoTrackIndex >= 0
&& mAudioBufferInfo.size > 0
&& mAudioBufferInfo.presentationTimeUs > 0) {
try {
synchronized (mMediaMuxerLock) {
if (!mIsFirstAudioSampleWrite.get()) {
Log.d(TAG, "First audio sample written.");
mIsFirstAudioSampleWrite.set(true);
}
mMediaMuxer.writeSampleData(mAudioTrackIndex, buffer, mAudioBufferInfo);
}
} catch (Exception e) {
Log.e(TAG, "audio error:size="
+ mAudioBufferInfo.size
+ "/offset="
+ mAudioBufferInfo.offset
+ "/timeUs="
+ mAudioBufferInfo.presentationTimeUs);
e.printStackTrace();
}
}
mAudioEncoder.releaseOutputBuffer(bufferIndex, false);
return (mAudioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
}
/**
* The video encoding routine: runs on a worker thread and loops until recording ends.
* Each completed output buffer is written out to the muxer.
*/
boolean videoEncode(@NonNull OnVideoSavedCallback videoSavedCallback) {
// Main encoding loop. Exits on end of stream.
boolean errorOccurred = false;
boolean videoEos = false;
while (!videoEos && !errorOccurred && mVideoEncoder != null) {
// Check for end of stream from main thread
if (mEndOfVideoStreamSignal.get()) {
mVideoEncoder.signalEndOfInputStream();
mEndOfVideoStreamSignal.set(false);
}
// Deque buffer to check for processing step
int outputBufferId = mVideoEncoder.dequeueOutputBuffer(mVideoBufferInfo, DEQUE_TIMEOUT_USEC);
switch (outputBufferId) {
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
if (mMuxerStarted) {
videoSavedCallback.onError(ERROR_ENCODER, "Unexpected change in video encoding format.", null);
errorOccurred = true;
}
synchronized (mMediaMuxerLock) {
mVideoTrackIndex = mMediaMuxer.addTrack(mVideoEncoder.getOutputFormat());
Log.d(TAG, "mAudioTrackIndex:" + mAudioTrackIndex + "mVideoTrackIndex:" + mVideoTrackIndex);
if (mAudioTrackIndex >= 0 && mVideoTrackIndex >= 0) {
mMuxerStarted = true;
Log.i(TAG, "media mMuxer start by video");
mMediaMuxer.start();
}
}
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
// Timed out. Just wait until next attempt to deque.
break;
default:
videoEos = writeVideoEncodedBuffer(outputBufferId);
}
}
// When the loop exits, recording is finished; stop the video encoder and release resources
try {
Log.i(TAG, "videoEncoder stop");
mVideoEncoder.stop();
} catch (IllegalStateException e) {
videoSavedCallback.onError(ERROR_ENCODER, "Video encoder stop failed!", e);
errorOccurred = true;
}
// Video encoding takes longer, so the muxer is stopped here, after the video loop ends
try {
synchronized (mMediaMuxerLock) {
if (mMediaMuxer != null) {
if (mMuxerStarted) {
mMediaMuxer.stop();
}
mMediaMuxer.release();
mMediaMuxer = null;
}
}
} catch (IllegalStateException e) {
videoSavedCallback.onError(ERROR_MUXER, "Muxer stop failed!", e);
errorOccurred = true;
}
if (mParcelFileDescriptor != null) {
try {
mParcelFileDescriptor.close();
mParcelFileDescriptor = null;
} catch (IOException e) {
videoSavedCallback.onError(ERROR_MUXER, "File descriptor close failed!", e);
errorOccurred = true;
}
}
// Reset the flags to the stopped state
mMuxerStarted = false;
mEndOfAudioVideoSignal.set(true);
Log.d(TAG, "Video encode thread end.");
return errorOccurred;
}
/**
* Writes an encoded video buffer out to the muxer.
*/
private boolean writeVideoEncodedBuffer(int bufferIndex) {
if (bufferIndex < 0) {
Log.e(TAG, "Output buffer should not have negative index: " + bufferIndex);
return false;
}
// Get data from buffer
ByteBuffer outputBuffer = mVideoEncoder.getOutputBuffer(bufferIndex);
// Check if buffer is valid, if not then return
if (outputBuffer == null) {
Log.d(TAG, "OutputBuffer was null.");
return false;
}
// Write data to mMuxer if available
if (mAudioTrackIndex >= 0 && mVideoTrackIndex >= 0 && mVideoBufferInfo.size > 0) {
outputBuffer.position(mVideoBufferInfo.offset);
outputBuffer.limit(mVideoBufferInfo.offset + mVideoBufferInfo.size);
mVideoBufferInfo.presentationTimeUs = (System.nanoTime() / 1000);
synchronized (mMediaMuxerLock) {
if (!mIsFirstVideoSampleWrite.get()) {
Log.d(TAG, "First video sample written.");
mIsFirstVideoSampleWrite.set(true);
}
Log.d(TAG, "write video Data");
mMediaMuxer.writeSampleData(mVideoTrackIndex, outputBuffer, mVideoBufferInfo);
}
}
// Release data
mVideoEncoder.releaseOutputBuffer(bufferIndex, false);
// Return true if EOS is set
return (mVideoBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
}
/**
* Stop recording.
*/
public void stopRecording() {
if (Looper.getMainLooper() != Looper.myLooper()) {
CameraXExecutors.mainThreadExecutor().execute(() -> stopRecording());
return;
}
Log.d(TAG, "stopRecording");
if (!mEndOfAudioVideoSignal.get() && mIsRecording) {
// Signal the audio encoder thread to stop, then wait for the video encoder and the muxer to finish
mEndOfAudioStreamSignal.set(true);
}
}
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
public void release() {
stopRecording();
if (mRecordingFuture != null) {
mRecordingFuture.addListener(() -> releaseResources(),
CameraXExecutors.mainThreadExecutor());
} else {
releaseResources();
}
}
private void releaseResources() {
mVideoHandlerThread.quitSafely();
mAudioHandlerThread.quitSafely();
if (mAudioEncoder != null) {
mAudioEncoder.release();
mAudioEncoder = null;
}
if (mAudioRecorder != null) {
mAudioRecorder.release();
mAudioRecorder = null;
}
if (mCameraSurface != null) {
releaseCameraSurface(true);
}
}
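releaseCameraSurface(), called from the constructor, setupEncoder() and releaseResources(), is omitted above as well. A plausible sketch (the exact behavior is an assumption): release the encoder's input Surface, and release the video encoder too only when the whole utility is being torn down:
private void releaseCameraSurface(final boolean releaseVideoEncoder) {
    // When true, the video encoder itself is released together with its input Surface
    if (releaseVideoEncoder && mVideoEncoder != null) {
        mVideoEncoder.release();
        mVideoEncoder = null;
    }
    if (mCameraSurface != null) {
        mCameraSurface.release();
        mCameraSurface = null;
    }
}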
The key to Surface-based encoding is in these two lines: the encoder's color format is set to COLOR_FormatSurface, and the input Surface is obtained from the encoder itself.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
Surface inputSurface = mVideoEncoder.createInputSurface();
public class Camera2SurfaceProvider extends BaseCommonCameraProvider {
public Camera2SurfaceProvider(Activity mContext) {
super(mContext);
...
}
private void initCamera() {
...
if (mCameraInfoListener != null) {
mCameraInfoListener.getBestSize(outputSize);
// Initialize the recording utility
VideoCaptureUtils.RecordConfig recordConfig = new VideoCaptureUtils.RecordConfig.Builder().build();
// Surface-based recording utility
videoCaptureUtils = new VideoCaptureUtils(recordConfig, outputSize);
}
}
public void startPreviewSession(Size size) {
try {
releaseCameraSession(session);
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
...
// Add the preview TextureView's Surface
Surface previewSurface = new Surface(surfaceTexture);
mPreviewBuilder.addTarget(previewSurface);
outputs.add(previewSurface);
// Add the Surface that feeds the encoder as another output target
// This is the Surface created via mVideoEncoder.createInputSurface()
Surface inputSurface = videoCaptureUtils.mCameraSurface;
mPreviewBuilder.addTarget(inputSurface);
outputs.add(inputSurface);
mCameraDevice.createCaptureSession(outputs, mStateCallBack, mCameraHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
The preview Surface and the recording Surface are siblings: both receive the same camera data, but each does its own thing. One handles display, the other handles encoding, and the two may not even agree on size, resolution, or aspect ratio, to say nothing of applying the same effects.
We will come back to this point in a later article.
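To put the pieces together, here is a hedged usage sketch of driving the recorder from the Activity or provider. The OutputFileOptions.Builder(File) constructor and the getSavedUri() accessor are assumptions modeled on the CameraX-style API these classes mirror; adjust them to the real signatures in the project.
// Usage sketch (the builder and result accessors are assumed names)
File outFile = new File(getExternalFilesDir(Environment.DIRECTORY_MOVIES),
        "record_" + System.currentTimeMillis() + ".mp4");
OutputFileOptions options = new OutputFileOptions.Builder(outFile).build();

videoCaptureUtils.startRecording(options, ContextCompat.getMainExecutor(this),
        new OnVideoSavedCallback() {
            @Override
            public void onVideoSaved(@NonNull OutputFileResults results) {
                Log.d(TAG, "Video saved: " + results.getSavedUri());
            }

            @Override
            public void onError(int errorCode, @NonNull String message, @Nullable Throwable cause) {
                Log.e(TAG, "Recording failed: " + message, cause);
            }
        });

// ... later, when the user taps stop:
videoCaptureUtils.stopRecording();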
If any code in this article looks incomplete, you can open the project source and check it there. You are also welcome to follow my open-source project; subsequent changes, optimizations, and new features will keep being updated there:
https://gitee.com/newki123456/Kotlin-Room